// route.ts
  1. import { type OpenAIListModelResponse } from "@/app/client/platforms/openai";
  2. import { getServerSideConfig } from "@/app/config/server";
  3. import {
  4. ANTHROPIC_BASE_URL,
  5. Anthropic,
  6. ApiPath,
  7. DEFAULT_MODELS,
  8. ModelProvider,
  9. OpenaiPath,
  10. } from "@/app/constant";
  11. import { prettyObject } from "@/app/utils/format";
  12. import { NextRequest, NextResponse } from "next/server";
  13. import { auth } from "../../auth";
  14. import { requestOpenai } from "../../common";
  15. import { collectModelTable } from "@/app/utils/model";
// Whitelist of Anthropic sub-paths this route will proxy; anything else is 403'd.
// NOTE(review): identifier has a typo (ALLOWD → ALLOWED); kept as-is because
// renaming would touch every reference in this module.
const ALLOWD_PATH = new Set([Anthropic.ChatPath, Anthropic.ChatPath1]);
  17. async function handle(
  18. req: NextRequest,
  19. { params }: { params: { path: string[] } },
  20. ) {
  21. console.log("[Anthropic Route] params ", params);
  22. if (req.method === "OPTIONS") {
  23. return NextResponse.json({ body: "OK" }, { status: 200 });
  24. }
  25. const subpath = params.path.join("/");
  26. if (!ALLOWD_PATH.has(subpath)) {
  27. console.log("[Anthropic Route] forbidden path ", subpath);
  28. return NextResponse.json(
  29. {
  30. error: true,
  31. msg: "you are not allowed to request " + subpath,
  32. },
  33. {
  34. status: 403,
  35. },
  36. );
  37. }
  38. const authResult = auth(req, ModelProvider.Claude);
  39. if (authResult.error) {
  40. return NextResponse.json(authResult, {
  41. status: 401,
  42. });
  43. }
  44. try {
  45. const response = await request(req);
  46. return response;
  47. } catch (e) {
  48. console.error("[Anthropic] ", e);
  49. return NextResponse.json(prettyObject(e));
  50. }
  51. }
// Serve both GET and POST with the same handler.
export const GET = handle;
export const POST = handle;

// Run on the Edge runtime (required for streaming proxy responses).
export const runtime = "edge";

// Vercel edge regions this route may execute in.
export const preferredRegion = [
  "arn1",
  "bom1",
  "cdg1",
  "cle1",
  "cpt1",
  "dub1",
  "fra1",
  "gru1",
  "hnd1",
  "iad1",
  "icn1",
  "kix1",
  "lhr1",
  "pdx1",
  "sfo1",
  "sin1",
  "syd1",
];

// Server-side configuration, resolved once at module load.
const serverConfig = getServerSideConfig();
  75. export async function request(req: NextRequest) {
  76. const controller = new AbortController();
  77. let authHeaderName = "x-api-key";
  78. let authValue =
  79. req.headers.get(authHeaderName) ||
  80. req.headers.get("Authorization")?.replaceAll("Bearer ", "").trim() ||
  81. serverConfig.anthropicApiKey ||
  82. "";
  83. let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.Anthropic, "");
  84. let baseUrl =
  85. serverConfig.anthropicUrl || serverConfig.baseUrl || ANTHROPIC_BASE_URL;
  86. if (!baseUrl.startsWith("http")) {
  87. baseUrl = `https://${baseUrl}`;
  88. }
  89. if (baseUrl.endsWith("/")) {
  90. baseUrl = baseUrl.slice(0, -1);
  91. }
  92. console.log("[Proxy] ", path);
  93. console.log("[Base Url]", baseUrl);
  94. const timeoutId = setTimeout(
  95. () => {
  96. controller.abort();
  97. },
  98. 10 * 60 * 1000,
  99. );
  100. const fetchUrl = `${baseUrl}${path}`;
  101. const fetchOptions: RequestInit = {
  102. headers: {
  103. "Content-Type": "application/json",
  104. // "Cache-Control": "no-store",
  105. [authHeaderName]: authValue,
  106. "anthropic-version":
  107. req.headers.get("anthropic-version") ||
  108. serverConfig.anthropicApiVersion ||
  109. Anthropic.Vision,
  110. },
  111. method: req.method,
  112. body: req.body,
  113. redirect: "manual",
  114. // @ts-ignore
  115. duplex: "half",
  116. signal: controller.signal,
  117. };
  118. // #1815 try to refuse gpt4 request
  119. if (serverConfig.customModels && req.body) {
  120. try {
  121. const modelTable = collectModelTable(
  122. DEFAULT_MODELS,
  123. serverConfig.customModels,
  124. );
  125. const clonedBody = await req.text();
  126. fetchOptions.body = clonedBody;
  127. const jsonBody = JSON.parse(clonedBody) as { model?: string };
  128. // not undefined and is false
  129. if (modelTable[jsonBody?.model ?? ""].available === false) {
  130. return NextResponse.json(
  131. {
  132. error: true,
  133. message: `you are not allowed to use ${jsonBody?.model} model`,
  134. },
  135. {
  136. status: 403,
  137. },
  138. );
  139. }
  140. } catch (e) {
  141. console.error("[OpenAI] gpt4 filter", e);
  142. }
  143. }
  144. console.log("[Anthropic request]", fetchOptions.headers, req.method);
  145. try {
  146. const res = await fetch(fetchUrl, fetchOptions);
  147. console.log(
  148. "[Anthropic response]",
  149. res.status,
  150. " ",
  151. res.headers,
  152. res.url,
  153. );
  154. // to prevent browser prompt for credentials
  155. const newHeaders = new Headers(res.headers);
  156. newHeaders.delete("www-authenticate");
  157. // to disable nginx buffering
  158. newHeaders.set("X-Accel-Buffering", "no");
  159. // The latest version of the OpenAI API forced the content-encoding to be "br" in json response
  160. // So if the streaming is disabled, we need to remove the content-encoding header
  161. // Because Vercel uses gzip to compress the response, if we don't remove the content-encoding header
  162. // The browser will try to decode the response with brotli and fail
  163. newHeaders.delete("content-encoding");
  164. return new Response(res.body, {
  165. status: res.status,
  166. statusText: res.statusText,
  167. headers: newHeaders,
  168. });
  169. } finally {
  170. clearTimeout(timeoutId);
  171. }
  172. }